install.packages("DT")
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
method from
print.tbl_lazy
print.tbl_sql
── Attaching packages ───────────────────────────────────────────────────────────────────────────────────── tidyverse 1.3.0 ──
✓ tibble 3.0.4 ✓ dplyr 1.0.2
✓ tidyr 1.1.2 ✓ stringr 1.4.0
✓ readr 1.4.0 ✓ forcats 0.5.0
✓ purrr 0.3.4
── Conflicts ──────────────────────────────────────────────────────────────────────────────────────── tidyverse_conflicts() ──
x dplyr::filter() masks plotly::filter(), stats::filter()
x dplyr::lag() masks stats::lag()
library(forecast)
Registered S3 method overwritten by 'quantmod':
method from
as.zoo.data.frame zoo
This is forecast 8.13
Need help getting started? Try the online textbook FPP:
http://otexts.com/fpp2/
library(caTools)
library(earth)
Loading required package: Formula
Loading required package: plotmo
Loading required package: plotrix
Loading required package: TeachingDemos
Attaching package: ‘TeachingDemos’
The following object is masked from ‘package:plotly’:
subplot
library(randomForest)
randomForest 4.6-14
Type rfNews() to see new features/changes/bug fixes.
Attaching package: ‘randomForest’
The following object is masked from ‘package:dplyr’:
combine
The following object is masked from ‘package:ggplot2’:
margin
library(kernlab)
Attaching package: ‘kernlab’
The following object is masked from ‘package:purrr’:
cross
The following object is masked from ‘package:ggplot2’:
alpha
library(h2o)
----------------------------------------------------------------------
Your next step is to start H2O:
> h2o.init()
For H2O package documentation, ask for help:
> ??h2o
After starting H2O, you can use the Web UI at http://localhost:54321
For more information visit https://docs.h2o.ai
----------------------------------------------------------------------
Attaching package: ‘h2o’
The following objects are masked from ‘package:stats’:
cor, sd, var
The following objects are masked from ‘package:base’:
&&, %*%, %in%, ||, apply, as.factor, as.numeric, colnames, colnames<-, ifelse, is.character, is.factor,
is.numeric, log, log10, log1p, log2, round, signif, trunc
library(neuralnet)
Attaching package: ‘neuralnet’
The following object is masked from ‘package:dplyr’:
compute
library(Metrics)
Attaching package: ‘Metrics’
The following object is masked from ‘package:forecast’:
accuracy
library(caret)
Loading required package: lattice
Attaching package: ‘caret’
The following objects are masked from ‘package:Metrics’:
precision, recall
The following object is masked from ‘package:purrr’:
lift
library(hms)
library(lubridate)
Attaching package: ‘lubridate’
The following object is masked from ‘package:hms’:
hms
The following objects are masked from ‘package:h2o’:
day, hour, month, week, year
The following objects are masked from ‘package:base’:
date, intersect, setdiff, union
library(ggplot2)
library(gganimate)
library(gapminder)
library(gifski)
library(png)
library(shiny)
library(plotly)
library(shinydashboard)
library(DT)
Mall_customers <- read.csv("Mall_Customers.csv", stringsAsFactors = T)
Mall_customers <- Mall_customers %>% rename(
Annual_Income = Annual.Income..k..,
Spending_Score = Spending.Score..1.100.)
Mall_customers <- Mall_customers %>%
select(Genre, Age, Annual_Income, Spending_Score)
Mall_customers <- Mall_customers %>% mutate(
Gender = if_else(Genre == "Male", true = 1, false = 0)
) %>%
select(-Genre)
Mall_customers
summary(Mall_customers)
Age Annual_Income Spending_Score Gender
Min. :18.00 Min. : 15.00 Min. : 1.00 Min. :0.00
1st Qu.:28.75 1st Qu.: 41.50 1st Qu.:34.75 1st Qu.:0.00
Median :36.00 Median : 61.50 Median :50.00 Median :0.00
Mean :38.85 Mean : 60.56 Mean :50.20 Mean :0.44
3rd Qu.:49.00 3rd Qu.: 78.00 3rd Qu.:73.00 3rd Qu.:1.00
Max. :70.00 Max. :137.00 Max. :99.00 Max. :1.00
set.seed(123)
sample = sample.split(Mall_customers$Gender, SplitRatio = .75)
train = subset(Mall_customers, sample == TRUE)
test = subset(Mall_customers, sample == FALSE)
# Random Forest Model
RFModel <- randomForest(Gender ~ ., data = train, mtry = 3, ntree = 64)
The response has five or fewer unique values. Are you sure you want to do regression?
RFModel
Call:
randomForest(formula = Gender ~ ., data = train, mtry = 3, ntree = 64)
Type of random forest: regression
Number of trees: 64
No. of variables tried at each split: 3
Mean of squared residuals: 0.2689081
% Var explained: -9.13
plot(RFModel)
# Prediction
RFPrediction <- predict(RFModel, test, type = "class")
RFPrediction
2 3 11 13 15 19 24 29 32 34 35
0.29843750 0.59791667 0.51250000 0.48281250 0.26458333 0.15338542 0.48802083 0.14583333 0.29895833 0.57890625 0.33984375
39 44 45 48 54 56 61 63 73 76 77
0.14973958 0.29973958 0.17369792 0.09427083 0.46458333 0.12161458 0.50494792 0.70416667 0.63984375 0.39713542 0.21588542
78 82 84 85 87 95 106 107 121 128 137
0.21354167 0.22213542 0.18385417 0.43671875 0.33723958 0.10729167 0.37187500 0.93046875 0.28020833 0.79479167 0.82239583
138 149 150 152 153 157 159 160 162 166 182
0.12473958 0.35130208 0.36067708 0.41822917 0.71067708 0.83203125 0.84140625 0.08463542 0.05312500 0.49843750 0.32656250
184 188 189 191 199 200
0.42864583 0.57578125 0.27708333 0.26223958 0.51041667 0.53385417
# Generate Confusion Matrix
RFconfusionMatrix <- table(RFPrediction,
test$Gender,
dnn = c("Prediction", "Actual"))
# Calculate Accuracy
RFaccuracy <- round(sum(diag(RFconfusionMatrix))/ sum(RFconfusionMatrix), digits = 4)
cat("Random Forest Accuracy:", RFaccuracy)
Random Forest Accuracy: 0.02
# MSE
#Metrics::mse(test, RFPrediction)
#SVM
# Support Vector Machine Model
SVMmodel <- ksvm(Gender ~ .,
data = train,
kernel = "vanilladot")
Setting default kernel parameters
SVMmodel
Support Vector Machine object of class "ksvm"
SV type: eps-svr (regression)
parameter : epsilon = 0.1 cost C = 1
Linear (vanilla) kernel function.
Number of Support Vectors : 135
Objective Function Value : -119.3168
Training error : 1.606914
# Prediction
SVMpred <- predict(SVMmodel, test)
# Generate Confusion Matrix
SVMconfusionMatrix <- table(SVMpred,
test$Gender,
dnn = c("Prediction", "Actual"))
# Calculate Accuracy
SVMaccuracy <- round(sum(diag(SVMconfusionMatrix))/ sum(SVMconfusionMatrix), digits = 4)
cat("Support Vector Machine Accuracy:", SVMaccuracy)
Support Vector Machine Accuracy: 0
# MSE
#Metrics::mse(test, SVMpred)
# Neural Networks Model
NNModel <- neuralnet(Gender ~ .,
data = train)
NNModel
$call
neuralnet(formula = Gender ~ ., data = train)
$response
Gender
1 1
4 0
5 0
6 0
7 0
8 0
9 1
10 0
12 0
14 0
16 1
17 0
18 1
20 0
21 1
22 1
23 0
25 0
26 1
27 0
28 1
30 0
31 1
33 1
36 0
37 0
38 0
40 0
41 0
42 1
43 1
46 0
47 0
49 0
50 0
51 0
52 1
53 0
55 0
57 0
58 1
59 0
60 1
62 1
64 0
65 1
66 1
67 0
68 0
69 1
70 0
71 1
72 0
74 0
75 1
79 0
80 0
81 1
83 1
86 1
88 0
89 0
90 0
91 0
92 1
93 1
94 0
96 1
97 0
98 0
99 1
100 1
101 0
102 0
103 1
104 1
105 1
108 1
109 1
110 1
111 1
112 0
113 0
114 1
115 0
116 0
117 0
118 0
119 0
120 0
122 0
123 0
124 1
125 0
126 0
127 1
129 1
130 1
131 1
132 1
133 0
134 0
135 1
136 0
139 1
140 0
141 0
142 1
143 0
144 0
145 1
146 1
147 1
148 0
151 1
154 0
155 0
156 0
158 0
161 0
163 1
164 0
165 1
167 1
168 0
169 0
170 1
171 1
172 1
173 1
174 1
175 0
176 0
177 1
178 1
179 1
180 1
181 0
183 1
185 0
186 1
187 0
190 0
192 0
193 1
194 0
195 0
196 0
197 0
198 1
$covariate
Age Annual_Income Spending_Score
1 19 15 39
4 23 16 77
5 31 17 40
6 22 17 76
7 35 18 6
8 23 18 94
9 64 19 3
10 30 19 72
12 35 19 99
14 24 20 77
16 22 20 79
17 35 21 35
18 20 21 66
20 35 23 98
21 35 24 35
22 25 24 73
23 46 25 5
25 54 28 14
26 29 28 82
27 45 28 32
28 35 28 61
30 23 29 87
31 60 30 4
33 53 33 4
36 21 33 81
37 42 34 17
38 30 34 73
40 20 37 75
41 65 38 35
42 24 38 92
43 48 39 36
46 24 39 65
47 50 40 55
49 29 40 42
50 31 40 42
51 49 42 52
52 33 42 60
53 31 43 54
55 50 43 45
57 51 44 50
58 69 44 46
59 27 46 51
60 53 46 46
62 19 46 55
64 54 47 59
65 63 48 51
66 18 48 59
67 43 48 50
68 68 48 48
69 19 48 59
70 32 48 47
71 70 49 55
72 47 49 42
74 60 50 56
75 59 54 47
79 23 54 52
80 49 54 42
81 57 54 51
83 67 54 41
86 48 54 46
88 22 57 55
89 34 58 60
90 50 58 46
91 68 59 55
92 18 59 41
93 48 60 49
94 40 60 40
96 24 60 52
97 47 60 47
98 27 60 50
99 48 61 42
100 20 61 49
101 23 62 41
102 49 62 48
103 67 62 59
104 26 62 55
105 49 62 56
108 54 63 46
109 68 63 43
110 66 63 48
111 65 63 52
112 19 63 54
113 38 64 42
114 19 64 46
115 18 65 48
116 19 65 50
117 63 65 43
118 49 65 59
119 51 67 43
120 50 67 57
122 38 67 40
123 40 69 58
124 39 69 91
125 23 70 29
126 31 70 77
127 43 71 35
129 59 71 11
130 38 71 75
131 47 71 9
132 39 71 75
133 25 72 34
134 31 72 71
135 20 73 5
136 29 73 88
139 19 74 10
140 35 74 72
141 57 75 5
142 32 75 93
143 28 76 40
144 32 76 87
145 25 77 12
146 28 77 97
147 48 77 36
148 32 77 74
151 43 78 17
154 38 78 76
155 47 78 16
156 27 78 89
158 30 78 78
161 56 79 35
163 19 81 5
164 31 81 93
165 50 85 26
167 42 86 20
168 33 86 95
169 36 87 27
170 32 87 63
171 40 87 13
172 28 87 75
173 36 87 10
174 36 87 92
175 52 88 13
176 30 88 86
177 58 88 15
178 27 88 69
179 59 93 14
180 35 93 90
181 37 97 32
183 46 98 15
185 41 99 39
186 30 99 97
187 54 101 24
190 36 103 85
192 32 103 69
193 33 113 8
194 38 113 91
195 47 120 16
196 35 120 79
197 45 126 28
198 32 126 74
$model.list
$model.list$response
[1] "Gender"
$model.list$variables
[1] "Age" "Annual_Income" "Spending_Score"
$err.fct
function (x, y)
{
1/2 * (y - x)^2
}
<bytecode: 0x7f8e78d00c38>
<environment: 0x7f8e78d034c0>
attr(,"type")
[1] "sse"
$act.fct
function (x)
{
1/(1 + exp(-x))
}
<bytecode: 0x7f8e78cf6710>
<environment: 0x7f8e78cfbd28>
attr(,"type")
[1] "logistic"
$linear.output
[1] TRUE
$data
Age Annual_Income Spending_Score Gender
1 19 15 39 1
4 23 16 77 0
5 31 17 40 0
6 22 17 76 0
7 35 18 6 0
8 23 18 94 0
9 64 19 3 1
10 30 19 72 0
12 35 19 99 0
14 24 20 77 0
16 22 20 79 1
17 35 21 35 0
18 20 21 66 1
20 35 23 98 0
21 35 24 35 1
22 25 24 73 1
23 46 25 5 0
25 54 28 14 0
26 29 28 82 1
27 45 28 32 0
28 35 28 61 1
30 23 29 87 0
31 60 30 4 1
33 53 33 4 1
36 21 33 81 0
37 42 34 17 0
38 30 34 73 0
40 20 37 75 0
41 65 38 35 0
42 24 38 92 1
43 48 39 36 1
46 24 39 65 0
47 50 40 55 0
49 29 40 42 0
50 31 40 42 0
51 49 42 52 0
52 33 42 60 1
53 31 43 54 0
55 50 43 45 0
57 51 44 50 0
58 69 44 46 1
59 27 46 51 0
60 53 46 46 1
62 19 46 55 1
64 54 47 59 0
65 63 48 51 1
66 18 48 59 1
67 43 48 50 0
68 68 48 48 0
69 19 48 59 1
70 32 48 47 0
71 70 49 55 1
72 47 49 42 0
74 60 50 56 0
75 59 54 47 1
79 23 54 52 0
80 49 54 42 0
81 57 54 51 1
83 67 54 41 1
86 48 54 46 1
88 22 57 55 0
89 34 58 60 0
90 50 58 46 0
91 68 59 55 0
92 18 59 41 1
93 48 60 49 1
94 40 60 40 0
96 24 60 52 1
97 47 60 47 0
98 27 60 50 0
99 48 61 42 1
100 20 61 49 1
101 23 62 41 0
102 49 62 48 0
103 67 62 59 1
104 26 62 55 1
105 49 62 56 1
108 54 63 46 1
109 68 63 43 1
110 66 63 48 1
111 65 63 52 1
112 19 63 54 0
113 38 64 42 0
114 19 64 46 1
115 18 65 48 0
116 19 65 50 0
117 63 65 43 0
118 49 65 59 0
119 51 67 43 0
120 50 67 57 0
122 38 67 40 0
123 40 69 58 0
124 39 69 91 1
125 23 70 29 0
126 31 70 77 0
127 43 71 35 1
129 59 71 11 1
130 38 71 75 1
131 47 71 9 1
132 39 71 75 1
133 25 72 34 0
134 31 72 71 0
135 20 73 5 1
136 29 73 88 0
139 19 74 10 1
140 35 74 72 0
141 57 75 5 0
142 32 75 93 1
143 28 76 40 0
144 32 76 87 0
145 25 77 12 1
146 28 77 97 1
147 48 77 36 1
148 32 77 74 0
151 43 78 17 1
154 38 78 76 0
155 47 78 16 0
156 27 78 89 0
158 30 78 78 0
161 56 79 35 0
163 19 81 5 1
164 31 81 93 0
165 50 85 26 1
167 42 86 20 1
168 33 86 95 0
169 36 87 27 0
170 32 87 63 1
171 40 87 13 1
172 28 87 75 1
173 36 87 10 1
174 36 87 92 1
175 52 88 13 0
176 30 88 86 0
177 58 88 15 1
178 27 88 69 1
179 59 93 14 1
180 35 93 90 1
181 37 97 32 0
183 46 98 15 1
185 41 99 39 0
186 30 99 97 1
187 54 101 24 0
190 36 103 85 0
192 32 103 69 0
193 33 113 8 1
194 38 113 91 0
195 47 120 16 0
196 35 120 79 0
197 45 126 28 0
198 32 126 74 1
$exclude
NULL
$net.result
$net.result[[1]]
[,1]
1 0.4400289
4 0.4400289
5 0.4400289
6 0.4400289
7 0.4397426
8 0.4400289
9 0.4400228
10 0.4400289
12 0.4400289
14 0.4400289
16 0.4400289
17 0.4400289
18 0.4400289
20 0.4400289
21 0.4400289
22 0.4400289
23 0.4400005
25 0.4400288
26 0.4400289
27 0.4400289
28 0.4400289
30 0.4400289
31 0.4400268
33 0.4400241
36 0.4400289
37 0.4400287
38 0.4400289
40 0.4400289
41 0.4400289
42 0.4400289
43 0.4400289
46 0.4400289
47 0.4400289
49 0.4400289
50 0.4400289
51 0.4400289
52 0.4400289
53 0.4400289
55 0.4400289
57 0.4400289
58 0.4400289
59 0.4400289
60 0.4400289
62 0.4400289
64 0.4400289
65 0.4400289
66 0.4400289
67 0.4400289
68 0.4400289
69 0.4400289
70 0.4400289
71 0.4400289
72 0.4400289
74 0.4400289
75 0.4400289
79 0.4400289
80 0.4400289
81 0.4400289
83 0.4400289
86 0.4400289
88 0.4400289
89 0.4400289
90 0.4400289
91 0.4400289
92 0.4400289
93 0.4400289
94 0.4400289
96 0.4400289
97 0.4400289
98 0.4400289
99 0.4400289
100 0.4400289
101 0.4400289
102 0.4400289
103 0.4400289
104 0.4400289
105 0.4400289
108 0.4400289
109 0.4400289
110 0.4400289
111 0.4400289
112 0.4400289
113 0.4400289
114 0.4400289
115 0.4400289
116 0.4400289
117 0.4400289
118 0.4400289
119 0.4400289
120 0.4400289
122 0.4400289
123 0.4400289
124 0.4400289
125 0.4400289
126 0.4400289
127 0.4400289
129 0.4400289
130 0.4400289
131 0.4400289
132 0.4400289
133 0.4400289
134 0.4400289
135 0.4400224
136 0.4400289
139 0.4400279
140 0.4400289
141 0.4400289
142 0.4400289
143 0.4400289
144 0.4400289
145 0.4400288
146 0.4400289
147 0.4400289
148 0.4400289
151 0.4400289
154 0.4400289
155 0.4400289
156 0.4400289
158 0.4400289
161 0.4400289
163 0.4400260
164 0.4400289
165 0.4400289
167 0.4400289
168 0.4400289
169 0.4400289
170 0.4400289
171 0.4400289
172 0.4400289
173 0.4400289
174 0.4400289
175 0.4400289
176 0.4400289
177 0.4400289
178 0.4400289
179 0.4400289
180 0.4400289
181 0.4400289
183 0.4400289
185 0.4400289
186 0.4400289
187 0.4400289
190 0.4400289
192 0.4400289
193 0.4400289
194 0.4400289
195 0.4400289
196 0.4400289
197 0.4400289
198 0.4400289
$weights
$weights[[1]]
$weights[[1]][[1]]
[,1]
[1,] 1.3857495
[2,] -0.1667256
[3,] -0.1208851
[4,] -0.3709971
$weights[[1]][[2]]
[,1]
[1,] 0.4400289
[2,] -2.0001810
$generalized.weights
$generalized.weights[[1]]
[,1] [,2] [,3]
1 1.933043e-08 1.401561e-08 4.301400e-08
4 6.629387e-15 4.806665e-15 1.475169e-14
5 1.416524e-09 1.027057e-09 3.152045e-09
6 1.005781e-14 7.292461e-15 2.238061e-14
7 1.937212e-04 1.404584e-04 4.310676e-04
8 9.492917e-18 6.882880e-18 2.112360e-17
9 4.152155e-06 3.010538e-06 9.239360e-06
10 9.177581e-15 6.654244e-15 2.042192e-14
12 1.779893e-19 1.290519e-19 3.960611e-19
14 3.459923e-15 2.508632e-15 7.699007e-15
16 2.299526e-15 1.667281e-15 5.116898e-15
17 2.865518e-09 2.077656e-09 6.376340e-09
18 3.536233e-13 2.563961e-13 7.868812e-13
20 1.590439e-19 1.153154e-19 3.539038e-19
21 1.993903e-09 1.445688e-09 4.436825e-09
22 7.964299e-15 5.774549e-15 1.772213e-14
23 1.924732e-05 1.395535e-05 4.282906e-05
25 1.251743e-07 9.075817e-08 2.785374e-07
26 8.941667e-17 6.483194e-17 1.989696e-16
27 7.063031e-10 5.121080e-10 1.571663e-09
28 7.953032e-14 5.766380e-14 1.769706e-13
30 3.370984e-17 2.444147e-17 7.501102e-17
31 1.476684e-06 1.070676e-06 3.285911e-06
33 3.300986e-06 2.393394e-06 7.345341e-06
36 2.687237e-16 1.948392e-16 5.979628e-16
37 1.472471e-07 1.067622e-07 3.276538e-07
38 1.033024e-15 7.489981e-16 2.298680e-15
40 1.813206e-15 1.314672e-15 4.034738e-15
41 2.468696e-12 1.789939e-12 5.493334e-12
42 1.503977e-18 1.090465e-18 3.346643e-18
43 2.569106e-11 1.862741e-11 5.716764e-11
46 2.985628e-14 2.164742e-14 6.643608e-14
47 1.416276e-14 1.026877e-14 3.151493e-14
49 5.838513e-11 4.233239e-11 1.299184e-10
50 4.182984e-11 3.032891e-11 9.307961e-11
51 3.998735e-14 2.899300e-14 8.897969e-14
52 2.961246e-14 2.147064e-14 6.589354e-14
53 3.392524e-13 2.459764e-13 7.549031e-13
55 4.025955e-13 2.919036e-13 8.958540e-13
57 4.724449e-14 3.425482e-14 1.051283e-13
58 1.036323e-14 7.513904e-15 2.306022e-14
59 1.399657e-12 1.014828e-12 3.114512e-12
60 1.172259e-13 8.499517e-14 2.608507e-13
62 1.204475e-12 8.733096e-13 2.680193e-12
64 7.071798e-16 5.127437e-16 1.573614e-15
65 2.718530e-15 1.971082e-15 6.049263e-15
66 2.533484e-13 1.836914e-13 5.637500e-13
67 1.105646e-13 8.016536e-14 2.460280e-13
68 3.594680e-15 2.606338e-15 7.998868e-15
69 2.144422e-13 1.554822e-13 4.771759e-13
70 2.106058e-12 1.527006e-12 4.686392e-12
71 1.700162e-16 1.232710e-16 3.783195e-16
72 9.782742e-13 7.093019e-13 2.176852e-12
74 5.507426e-16 3.993183e-16 1.225510e-15
75 1.130982e-14 8.200236e-15 2.516658e-14
79 7.153753e-13 5.186859e-13 1.591850e-12
80 3.829534e-13 2.776620e-13 8.521465e-13
81 3.579182e-15 2.595101e-15 7.964383e-15
83 2.760094e-14 2.001218e-14 6.141751e-14
86 1.025808e-13 7.437662e-14 2.282623e-13
88 1.932309e-13 1.401029e-13 4.299768e-13
89 3.623021e-15 2.626887e-15 8.061933e-15
90 4.531594e-14 3.285651e-14 1.008369e-13
91 7.084498e-17 5.136645e-17 1.576440e-16
92 5.326199e-11 3.861783e-11 1.185184e-10
93 1.631930e-14 1.183238e-14 3.631364e-14
94 1.746078e-12 1.266001e-12 3.885366e-12
96 2.931754e-13 2.125681e-13 6.523728e-13
97 4.049054e-14 2.935783e-14 9.009939e-14
98 3.733770e-13 2.707186e-13 8.308370e-13
99 1.941135e-13 1.407429e-13 4.319407e-13
100 1.540373e-12 1.116854e-12 3.427632e-12
101 1.610194e-11 1.167478e-11 3.582998e-11
102 1.571868e-14 1.139689e-14 3.497714e-14
103 1.320474e-17 9.574153e-18 2.938314e-17
104 5.419335e-14 3.929312e-14 1.205908e-13
105 8.080532e-16 5.858824e-16 1.798077e-15
108 1.270926e-14 9.214904e-15 2.828060e-14
109 3.747782e-15 2.717345e-15 8.339550e-15
110 8.184276e-16 5.934044e-16 1.821162e-15
111 2.192301e-16 1.589537e-16 4.878300e-16
112 2.235756e-13 1.621044e-13 4.974995e-13
113 7.155446e-13 5.188086e-13 1.592227e-12
114 3.853902e-12 2.794288e-12 8.575689e-12
115 1.921166e-12 1.392950e-12 4.274972e-12
116 7.743065e-13 5.614142e-13 1.722984e-12
117 6.773527e-15 4.911175e-15 1.507243e-14
118 1.847459e-16 1.339508e-16 4.110958e-16
119 3.932887e-14 2.851556e-14 8.751445e-14
120 2.578762e-16 1.869742e-16 5.738252e-16
122 1.045638e-12 7.581446e-13 2.326751e-12
123 7.402354e-16 5.367108e-16 1.647169e-15
124 4.214552e-21 3.055779e-21 9.378205e-21
125 5.252239e-10 3.808158e-10 1.168726e-09
126 2.554018e-18 1.851801e-18 5.683190e-18
127 1.790411e-12 1.298145e-12 3.984015e-12
129 9.148563e-10 6.633205e-10 2.035735e-09
130 1.479490e-18 1.072711e-18 3.292156e-18
131 1.420669e-08 1.030062e-08 3.161268e-08
132 1.252288e-18 9.079766e-19 2.786586e-18
133 4.622948e-11 3.351888e-11 1.028697e-10
134 1.857618e-17 1.346874e-17 4.133565e-17
135 4.436047e-06 3.216375e-06 9.871076e-06
136 4.189851e-20 3.037869e-20 9.323240e-20
139 7.266003e-07 5.268247e-07 1.616828e-06
140 5.166578e-18 3.746050e-18 1.149665e-17
141 7.292930e-09 5.287770e-09 1.622820e-08
142 3.121512e-21 2.263266e-21 6.945977e-21
143 1.866225e-12 1.353114e-12 4.152717e-12
144 2.562108e-20 1.857667e-20 5.701192e-20
145 8.853288e-08 6.419114e-08 1.970030e-07
146 1.082703e-21 7.850188e-22 2.409228e-21
147 2.598916e-13 1.884355e-13 5.783098e-13
148 2.822824e-18 2.046701e-18 6.281337e-18
151 6.104509e-10 4.426100e-10 1.358373e-09
154 4.380181e-19 3.175869e-19 9.746762e-19
155 4.540888e-10 3.292390e-10 1.010437e-09
156 2.204925e-20 1.598691e-20 4.906392e-20
158 7.916114e-19 5.739612e-19 1.761491e-18
161 7.792033e-14 5.649646e-14 1.733880e-13
163 1.992540e-06 1.444700e-06 4.433793e-06
164 1.785561e-21 1.294628e-21 3.973223e-21
165 2.892001e-12 2.096858e-12 6.435271e-12
167 9.009379e-11 6.532289e-11 2.004764e-10
168 3.328250e-22 2.413162e-22 7.406010e-22
169 1.617294e-11 1.172626e-11 3.598797e-11
170 4.989174e-17 3.617422e-17 1.110189e-16
171 1.495771e-09 1.084515e-09 3.328385e-09
172 1.132913e-18 8.214233e-19 2.520953e-18
173 8.868768e-09 6.430338e-09 1.973475e-08
174 5.443250e-22 3.946651e-22 1.211230e-21
175 1.792543e-10 1.299691e-10 3.988759e-10
176 1.214895e-20 8.808646e-21 2.703379e-20
177 3.138900e-11 2.275873e-11 6.984668e-11
178 1.098593e-17 7.965396e-18 2.444585e-17
179 2.103744e-11 1.525328e-11 4.681242e-11
180 6.539009e-22 4.741136e-22 1.455058e-21
181 6.394016e-13 4.636008e-13 1.422794e-12
183 6.929089e-11 5.023966e-11 1.541858e-10
185 1.919942e-14 1.392062e-14 4.272248e-14
186 5.428745e-23 3.936135e-23 1.208002e-22
187 4.506245e-13 3.267272e-13 1.002728e-12
190 1.056128e-21 7.657503e-22 2.350093e-21
192 7.785758e-19 5.645097e-19 1.732484e-18
193 1.325374e-09 9.609678e-10 2.949216e-09
194 2.438772e-23 1.768242e-23 5.426747e-23
195 2.832384e-12 2.053632e-12 6.302609e-12
196 1.480340e-21 1.073326e-21 3.294046e-21
197 2.231031e-14 1.617618e-14 4.964481e-14
198 7.554349e-21 5.477313e-21 1.680991e-20
$startweights
$startweights[[1]]
$startweights[[1]][[1]]
[,1]
[1,] 0.89214945
[2,] -0.83512559
[3,] 0.53658644
[4,] 0.04892574
$startweights[[1]][[2]]
[,1]
[1,] -0.4243741
[2,] -1.5065810
$result.matrix
[,1]
error 18.479874846
reached.threshold 0.004433007
steps 35.000000000
Intercept.to.1layhid1 1.385749453
Age.to.1layhid1 -0.166725588
Annual_Income.to.1layhid1 -0.120885097
Spending_Score.to.1layhid1 -0.370997144
Intercept.to.Gender 0.440028940
1layhid1.to.Gender -2.000180983
attr(,"class")
[1] "nn"
# Visualization
NNModelPlot <- plot(NNModel)
NNModelPlot
NULL
# Prediction
NNPrediction <- predict(NNModel, test)
# Generate Confusion Matrix
NNconfusionMatrix <- table(NNPrediction,
test$Gender,
dnn = c("Prediction", "Actual"))
# Calculate Accuracy
NNaccuracy <- round(sum(diag(NNconfusionMatrix))/ sum(NNconfusionMatrix), digits = 4)
cat("Neural Network Model Accuracy:", NNaccuracy)
Neural Network Model Accuracy: 0.04
# MSE
#Metrics::mse(test, NNPrediction)
# Neural Networks Model
DNNModel <- neuralnet(Gender ~ .,
data = train,
hidden = 4)
DNNModel
$call
neuralnet(formula = Gender ~ ., data = train, hidden = 4)
$response
Gender
1 1
4 0
5 0
6 0
7 0
8 0
9 1
10 0
12 0
14 0
16 1
17 0
18 1
20 0
21 1
22 1
23 0
25 0
26 1
27 0
28 1
30 0
31 1
33 1
36 0
37 0
38 0
40 0
41 0
42 1
43 1
46 0
47 0
49 0
50 0
51 0
52 1
53 0
55 0
57 0
58 1
59 0
60 1
62 1
64 0
65 1
66 1
67 0
68 0
69 1
70 0
71 1
72 0
74 0
75 1
79 0
80 0
81 1
83 1
86 1
88 0
89 0
90 0
91 0
92 1
93 1
94 0
96 1
97 0
98 0
99 1
100 1
101 0
102 0
103 1
104 1
105 1
108 1
109 1
110 1
111 1
112 0
113 0
114 1
115 0
116 0
117 0
118 0
119 0
120 0
122 0
123 0
124 1
125 0
126 0
127 1
129 1
130 1
131 1
132 1
133 0
134 0
135 1
136 0
139 1
140 0
141 0
142 1
143 0
144 0
145 1
146 1
147 1
148 0
151 1
154 0
155 0
156 0
158 0
161 0
163 1
164 0
165 1
167 1
168 0
169 0
170 1
171 1
172 1
173 1
174 1
175 0
176 0
177 1
178 1
179 1
180 1
181 0
183 1
185 0
186 1
187 0
190 0
192 0
193 1
194 0
195 0
196 0
197 0
198 1
$covariate
Age Annual_Income Spending_Score
1 19 15 39
4 23 16 77
5 31 17 40
6 22 17 76
7 35 18 6
8 23 18 94
9 64 19 3
10 30 19 72
12 35 19 99
14 24 20 77
16 22 20 79
17 35 21 35
18 20 21 66
20 35 23 98
21 35 24 35
22 25 24 73
23 46 25 5
25 54 28 14
26 29 28 82
27 45 28 32
28 35 28 61
30 23 29 87
31 60 30 4
33 53 33 4
36 21 33 81
37 42 34 17
38 30 34 73
40 20 37 75
41 65 38 35
42 24 38 92
43 48 39 36
46 24 39 65
47 50 40 55
49 29 40 42
50 31 40 42
51 49 42 52
52 33 42 60
53 31 43 54
55 50 43 45
57 51 44 50
58 69 44 46
59 27 46 51
60 53 46 46
62 19 46 55
64 54 47 59
65 63 48 51
66 18 48 59
67 43 48 50
68 68 48 48
69 19 48 59
70 32 48 47
71 70 49 55
72 47 49 42
74 60 50 56
75 59 54 47
79 23 54 52
80 49 54 42
81 57 54 51
83 67 54 41
86 48 54 46
88 22 57 55
89 34 58 60
90 50 58 46
91 68 59 55
92 18 59 41
93 48 60 49
94 40 60 40
96 24 60 52
97 47 60 47
98 27 60 50
99 48 61 42
100 20 61 49
101 23 62 41
102 49 62 48
103 67 62 59
104 26 62 55
105 49 62 56
108 54 63 46
109 68 63 43
110 66 63 48
111 65 63 52
112 19 63 54
113 38 64 42
114 19 64 46
115 18 65 48
116 19 65 50
117 63 65 43
118 49 65 59
119 51 67 43
120 50 67 57
122 38 67 40
123 40 69 58
124 39 69 91
125 23 70 29
126 31 70 77
127 43 71 35
129 59 71 11
130 38 71 75
131 47 71 9
132 39 71 75
133 25 72 34
134 31 72 71
135 20 73 5
136 29 73 88
139 19 74 10
140 35 74 72
141 57 75 5
142 32 75 93
143 28 76 40
144 32 76 87
145 25 77 12
146 28 77 97
147 48 77 36
148 32 77 74
151 43 78 17
154 38 78 76
155 47 78 16
156 27 78 89
158 30 78 78
161 56 79 35
163 19 81 5
164 31 81 93
165 50 85 26
167 42 86 20
168 33 86 95
169 36 87 27
170 32 87 63
171 40 87 13
172 28 87 75
173 36 87 10
174 36 87 92
175 52 88 13
176 30 88 86
177 58 88 15
178 27 88 69
179 59 93 14
180 35 93 90
181 37 97 32
183 46 98 15
185 41 99 39
186 30 99 97
187 54 101 24
190 36 103 85
192 32 103 69
193 33 113 8
194 38 113 91
195 47 120 16
196 35 120 79
197 45 126 28
198 32 126 74
$model.list
$model.list$response
[1] "Gender"
$model.list$variables
[1] "Age" "Annual_Income" "Spending_Score"
$err.fct
function (x, y)
{
1/2 * (y - x)^2
}
<bytecode: 0x7f8e78d00c38>
<environment: 0x7f8e89b4a058>
attr(,"type")
[1] "sse"
$act.fct
function (x)
{
1/(1 + exp(-x))
}
<bytecode: 0x7f8e78cf6710>
<environment: 0x7f8e89b49bc0>
attr(,"type")
[1] "logistic"
$linear.output
[1] TRUE
$data
Age Annual_Income Spending_Score Gender
1 19 15 39 1
4 23 16 77 0
5 31 17 40 0
6 22 17 76 0
7 35 18 6 0
8 23 18 94 0
9 64 19 3 1
10 30 19 72 0
12 35 19 99 0
14 24 20 77 0
16 22 20 79 1
17 35 21 35 0
18 20 21 66 1
20 35 23 98 0
21 35 24 35 1
22 25 24 73 1
23 46 25 5 0
25 54 28 14 0
26 29 28 82 1
27 45 28 32 0
28 35 28 61 1
30 23 29 87 0
31 60 30 4 1
33 53 33 4 1
36 21 33 81 0
37 42 34 17 0
38 30 34 73 0
40 20 37 75 0
41 65 38 35 0
42 24 38 92 1
43 48 39 36 1
46 24 39 65 0
47 50 40 55 0
49 29 40 42 0
50 31 40 42 0
51 49 42 52 0
52 33 42 60 1
53 31 43 54 0
55 50 43 45 0
57 51 44 50 0
58 69 44 46 1
59 27 46 51 0
60 53 46 46 1
62 19 46 55 1
64 54 47 59 0
65 63 48 51 1
66 18 48 59 1
67 43 48 50 0
68 68 48 48 0
69 19 48 59 1
70 32 48 47 0
71 70 49 55 1
72 47 49 42 0
74 60 50 56 0
75 59 54 47 1
79 23 54 52 0
80 49 54 42 0
81 57 54 51 1
83 67 54 41 1
86 48 54 46 1
88 22 57 55 0
89 34 58 60 0
90 50 58 46 0
91 68 59 55 0
92 18 59 41 1
93 48 60 49 1
94 40 60 40 0
96 24 60 52 1
97 47 60 47 0
98 27 60 50 0
99 48 61 42 1
100 20 61 49 1
101 23 62 41 0
102 49 62 48 0
103 67 62 59 1
104 26 62 55 1
105 49 62 56 1
108 54 63 46 1
109 68 63 43 1
110 66 63 48 1
111 65 63 52 1
112 19 63 54 0
113 38 64 42 0
114 19 64 46 1
115 18 65 48 0
116 19 65 50 0
117 63 65 43 0
118 49 65 59 0
119 51 67 43 0
120 50 67 57 0
122 38 67 40 0
123 40 69 58 0
124 39 69 91 1
125 23 70 29 0
126 31 70 77 0
127 43 71 35 1
129 59 71 11 1
130 38 71 75 1
131 47 71 9 1
132 39 71 75 1
133 25 72 34 0
134 31 72 71 0
135 20 73 5 1
136 29 73 88 0
139 19 74 10 1
140 35 74 72 0
141 57 75 5 0
142 32 75 93 1
143 28 76 40 0
144 32 76 87 0
145 25 77 12 1
146 28 77 97 1
147 48 77 36 1
148 32 77 74 0
151 43 78 17 1
154 38 78 76 0
155 47 78 16 0
156 27 78 89 0
158 30 78 78 0
161 56 79 35 0
163 19 81 5 1
164 31 81 93 0
165 50 85 26 1
167 42 86 20 1
168 33 86 95 0
169 36 87 27 0
170 32 87 63 1
171 40 87 13 1
172 28 87 75 1
173 36 87 10 1
174 36 87 92 1
175 52 88 13 0
176 30 88 86 0
177 58 88 15 1
178 27 88 69 1
179 59 93 14 1
180 35 93 90 1
181 37 97 32 0
183 46 98 15 1
185 41 99 39 0
186 30 99 97 1
187 54 101 24 0
190 36 103 85 0
192 32 103 69 0
193 33 113 8 1
194 38 113 91 0
195 47 120 16 0
196 35 120 79 0
197 45 126 28 0
198 32 126 74 1
$exclude
NULL
$net.result
$net.result[[1]]
[,1]
1 3.972611e-01
4 3.972611e-01
5 3.972611e-01
6 3.972611e-01
7 -9.514633e-05
8 3.972611e-01
9 1.005854e+00
10 3.972611e-01
12 3.972611e-01
14 3.972611e-01
16 3.972611e-01
17 3.972611e-01
18 3.972611e-01
20 3.972611e-01
21 3.972611e-01
22 3.972611e-01
23 1.627349e-03
25 3.972383e-01
26 3.972611e-01
27 3.972611e-01
28 3.972611e-01
30 3.972611e-01
31 1.005218e+00
33 9.853036e-01
36 3.972611e-01
37 4.514743e-01
38 3.972611e-01
40 3.972611e-01
41 3.972611e-01
42 3.972611e-01
43 3.972611e-01
46 3.972611e-01
47 3.972611e-01
49 3.972611e-01
50 3.972611e-01
51 3.972611e-01
52 3.972611e-01
53 3.972611e-01
55 3.972611e-01
57 3.972611e-01
58 3.972611e-01
59 3.972611e-01
60 3.972611e-01
62 3.972611e-01
64 3.972611e-01
65 3.972611e-01
66 3.972611e-01
67 3.972611e-01
68 3.972611e-01
69 3.972611e-01
70 3.972611e-01
71 3.972611e-01
72 3.972611e-01
74 3.972611e-01
75 3.972611e-01
79 3.972611e-01
80 3.972611e-01
81 3.972611e-01
83 3.972611e-01
86 3.972611e-01
88 3.972611e-01
89 3.972611e-01
90 3.972611e-01
91 3.972611e-01
92 3.972611e-01
93 3.972611e-01
94 3.972611e-01
96 3.972611e-01
97 3.972611e-01
98 3.972611e-01
99 3.972611e-01
100 3.972611e-01
101 3.972611e-01
102 3.972611e-01
103 3.972611e-01
104 3.972611e-01
105 3.972611e-01
108 3.972611e-01
109 3.972611e-01
110 3.972611e-01
111 3.972611e-01
112 3.972611e-01
113 3.972611e-01
114 3.972611e-01
115 3.972611e-01
116 3.972611e-01
117 3.972611e-01
118 3.972611e-01
119 3.972611e-01
120 3.972611e-01
122 3.972611e-01
123 3.972611e-01
124 3.972611e-01
125 4.084511e-01
126 3.972611e-01
127 3.972611e-01
129 3.968792e-01
130 3.972611e-01
131 1.010497e+00
132 3.972611e-01
133 3.972611e-01
134 3.972611e-01
135 9.924202e-01
136 3.972611e-01
139 1.013135e+00
140 3.972611e-01
141 9.709446e-04
142 3.972611e-01
143 3.972611e-01
144 3.972611e-01
145 1.013160e+00
146 3.972611e-01
147 3.972611e-01
148 3.972611e-01
151 6.011413e-01
154 3.972611e-01
155 3.987682e-01
156 3.972611e-01
158 3.972611e-01
161 3.972611e-01
163 1.005297e+00
164 3.972611e-01
165 3.972611e-01
167 3.972870e-01
168 3.972611e-01
169 3.972611e-01
170 3.972611e-01
171 1.013161e+00
172 3.972611e-01
173 1.013089e+00
174 3.972611e-01
175 4.042871e-01
176 3.972611e-01
177 3.972609e-01
178 3.972611e-01
179 3.972602e-01
180 3.972611e-01
181 3.972611e-01
183 9.585422e-01
185 3.972611e-01
186 3.972611e-01
187 3.972611e-01
190 3.972611e-01
192 3.972611e-01
193 1.012875e+00
194 3.972611e-01
195 4.591411e-01
196 3.972611e-01
197 3.972611e-01
198 3.972611e-01
$weights
$weights[[1]]
$weights[[1]][[1]]
[,1] [,2] [,3] [,4]
[1,] 0.64370953 11.99907497 6.39385284 -164.58821176
[2,] 1.01488201 0.06800324 0.10120237 2.36576666
[3,] 1.05156374 -0.04655805 -0.05647392 -0.09091469
[4,] 0.06874939 -2.77347561 -1.57964573 4.15618428
$weights[[1]][[2]]
[,1]
[1,] -0.4072916
[2,] 1.4204539
[3,] 3.2063151
[4,] -3.1643466
[5,] -0.6159011
$generalized.weights
$generalized.weights[[1]]
[,1] [,2] [,3]
1 -4.121405e-24 2.299866e-24 6.433011e-23
4 -4.978877e-50 2.778361e-50 7.771421e-49
5 -2.554941e-24 1.425733e-24 3.987951e-23
6 -2.063882e-49 1.151707e-49 3.221469e-48
7 6.844786e+02 -3.699887e+02 -8.303683e+03
8 -9.672631e-62 5.397615e-62 1.509780e-60
9 1.829650e+01 -7.085530e-01 3.257178e+01
10 -2.298111e-46 1.282414e-46 3.587072e-45
12 -1.143617e-64 6.381725e-65 1.785048e-63
14 -4.395192e-50 2.452647e-50 6.860359e-49
16 -1.524094e-51 8.504896e-52 2.378925e-50
17 -8.226956e-21 4.590885e-21 1.284128e-19
18 -9.749686e-43 5.440614e-43 1.521807e-41
20 -4.427993e-64 2.470952e-64 6.911558e-63
21 -6.944808e-21 3.875409e-21 1.084000e-19
22 -2.152533e-47 1.201177e-47 3.359842e-46
23 8.421476e+00 -8.937711e+00 -9.741736e+02
25 -9.708591e-06 5.396131e-06 1.508186e-04
26 -1.723330e-53 9.616690e-54 2.689908e-52
27 -1.742471e-18 9.723505e-19 2.719785e-17
28 -8.067482e-39 4.501894e-39 1.259236e-37
30 -3.295908e-57 1.839215e-57 5.144512e-56
31 -3.491258e-01 3.555325e-01 3.360318e+01
33 1.110369e-01 -1.502252e-01 -1.928312e+01
36 -2.806436e-53 1.566075e-53 4.380505e-52
37 -4.723145e-01 1.815071e-02 -8.297628e-01
38 -2.029761e-47 1.132667e-47 3.168210e-46
40 -2.644154e-49 1.475517e-49 4.127203e-48
41 -6.559047e-20 3.660143e-20 1.023787e-18
42 -8.147659e-61 4.546635e-61 1.271750e-59
43 -2.286153e-21 1.275741e-21 3.568407e-20
46 -2.566581e-42 1.432228e-42 4.006120e-41
47 -2.442785e-34 1.363146e-34 3.812890e-33
49 -2.417183e-26 1.348860e-26 3.772928e-25
50 -2.959462e-26 1.651467e-26 4.619359e-25
51 -2.254111e-32 1.257861e-32 3.518392e-31
52 -1.450433e-38 8.093848e-39 2.263950e-37
53 -1.463042e-34 8.164207e-35 2.283630e-33
55 -1.494929e-27 8.342146e-28 2.333402e-26
57 -5.806156e-31 3.240007e-31 9.062703e-30
58 -1.991332e-27 1.111223e-27 3.108227e-26
59 -9.418149e-33 5.255606e-33 1.470058e-31
60 -3.522614e-28 1.965723e-28 5.498371e-27
62 -7.554867e-36 4.215840e-36 1.179223e-34
64 -4.445104e-37 2.480500e-37 6.938265e-36
65 -3.214931e-31 1.794027e-31 5.018115e-30
66 -1.099251e-38 6.134146e-39 1.715797e-37
67 -2.061439e-31 1.150344e-31 3.217655e-30
68 -6.095706e-29 3.401585e-29 9.514655e-28
69 -1.216322e-38 6.787436e-39 1.898530e-37
70 -7.740994e-30 4.319704e-30 1.208275e-28
71 -1.112195e-33 6.206379e-34 1.736001e-32
72 -8.988806e-26 5.016020e-26 1.403043e-24
74 -7.872397e-35 4.393031e-35 1.228785e-33
75 -8.478771e-29 4.731406e-29 1.323433e-27
79 -8.239808e-34 4.598057e-34 1.286134e-32
80 -8.298005e-26 4.630533e-26 1.295218e-24
81 -1.248257e-31 6.965643e-32 1.948377e-30
83 -2.489628e-24 1.389286e-24 3.886006e-23
86 -1.351750e-28 7.543166e-29 2.109917e-27
88 -5.499108e-36 3.068665e-36 8.583438e-35
89 -6.501722e-39 3.628154e-39 1.014840e-37
90 -1.320366e-28 7.368035e-29 2.060931e-27
91 -5.164338e-34 2.881854e-34 8.060902e-33
92 -1.317871e-26 7.354113e-27 2.057037e-25
93 -8.426442e-31 4.702205e-31 1.315265e-29
94 -5.601625e-25 3.125873e-25 8.743454e-24
96 -6.496970e-34 3.625502e-34 1.014098e-32
97 -1.793723e-29 1.000951e-29 2.799784e-28
98 -2.073140e-32 1.156873e-32 3.235919e-31
99 -5.050541e-26 2.818352e-26 7.883279e-25
100 -4.682441e-32 2.612941e-32 7.308719e-31
101 -1.845237e-26 1.029697e-26 2.880191e-25
102 -4.041799e-30 2.255443e-30 6.308755e-29
103 -7.101636e-37 3.962923e-37 1.108479e-35
104 -6.215373e-36 3.468363e-36 9.701440e-35
105 -1.313177e-35 7.327918e-36 2.049709e-34
108 -1.492345e-28 8.327730e-29 2.329369e-27
109 -7.035362e-26 3.925941e-26 1.098134e-24
110 -2.134156e-29 1.190922e-29 3.331157e-28
111 -3.476552e-32 1.940019e-32 5.426475e-31
112 -1.403818e-35 7.833722e-36 2.191189e-34
113 -1.549682e-26 8.647686e-27 2.418865e-25
114 -4.083532e-30 2.278732e-30 6.373896e-29
115 -1.480796e-31 8.263280e-32 2.311342e-30
116 -6.956391e-33 3.881872e-33 1.085808e-31
117 -3.788573e-26 2.114136e-26 5.913501e-25
118 -9.697283e-38 5.411372e-38 1.513628e-36
119 -1.004624e-26 5.606099e-27 1.568095e-25
120 -2.257418e-36 1.259706e-36 3.523554e-35
122 -3.081247e-25 1.719427e-25 4.809452e-24
123 -1.510120e-37 8.426920e-38 2.357114e-36
124 -3.133452e-60 1.748559e-60 4.890937e-59
125 -1.075735e-01 4.133972e-03 -1.889854e-01
126 -5.300582e-51 2.957882e-51 8.273563e-50
127 -1.097834e-21 6.126238e-22 1.713585e-20
129 -1.655472e-04 9.034022e-05 2.515457e-03
130 -2.396191e-49 1.337146e-49 3.740163e-48
131 2.542646e-02 -1.418323e-02 -3.957868e-01
132 -2.651387e-49 1.479553e-49 4.138492e-48
133 -1.124194e-12 4.320195e-14 -1.974986e-12
134 -6.186706e-47 3.452365e-47 9.656694e-46
135 -5.392072e-01 2.300007e-01 -5.679732e+00
136 -1.038713e-58 5.796329e-59 1.621305e-57
139 2.076933e-04 -1.158637e-04 -3.234819e-03
140 -1.706797e-47 9.524435e-48 2.664103e-46
141 -4.865177e+01 2.286092e+01 -9.324116e+01
142 -4.668108e-62 2.604943e-62 7.286348e-61
143 -6.737107e-26 3.759505e-26 1.051580e-24
144 -5.765050e-58 3.217069e-58 8.998541e-57
145 1.365155e-05 -7.617792e-06 -2.130484e-04
146 -5.013673e-65 2.797778e-65 7.825733e-64
147 -2.673655e-22 1.491979e-22 4.173250e-21
148 -4.515286e-49 2.519664e-49 7.047812e-48
151 -1.345735e+00 5.171562e-02 -2.364191e+00
154 -3.325109e-50 1.855509e-50 5.190090e-49
155 -1.483538e-02 5.701191e-04 -2.606262e-02
156 -1.318043e-59 7.355069e-60 2.057304e-58
158 -6.282464e-52 3.505801e-52 9.806161e-51
161 -2.604337e-21 1.453298e-21 4.065053e-20
163 3.972103e-01 -1.562723e-01 6.800524e+00
164 -3.006293e-62 1.677601e-62 4.692458e-61
165 -1.510471e-15 8.428876e-16 2.357661e-14
167 -2.552543e-04 9.809240e-06 -4.484311e-04
168 -1.178257e-63 6.575026e-64 1.839116e-62
169 -9.464178e-11 3.637050e-12 -1.662657e-10
170 -9.031664e-42 5.039936e-42 1.409733e-40
171 7.302291e-06 -4.072352e-06 -1.138916e-04
172 -3.528452e-50 1.968981e-50 5.507483e-49
173 5.591040e-04 -3.119351e-04 -8.714711e-03
174 -1.724499e-61 9.623218e-62 2.691734e-60
175 -6.825971e-02 2.623839e-03 -1.198965e-01
176 -1.160421e-57 6.475492e-58 1.811275e-56
177 -1.008190e-07 5.625387e-08 1.573455e-06
178 -3.938173e-46 2.197617e-46 6.147008e-45
179 -4.082824e-07 2.277756e-07 6.370819e-06
180 -2.615871e-60 1.459734e-60 4.083057e-59
181 -1.574842e-20 8.788083e-21 2.458136e-19
183 -2.963300e+00 1.138775e-01 -5.205931e+00
185 -3.324841e-25 1.855360e-25 5.189671e-24
186 -1.772068e-65 9.888665e-66 2.765982e-64
187 -2.160532e-14 1.205641e-14 3.372328e-13
190 -4.430594e-57 2.472403e-57 6.915618e-56
192 -2.800001e-46 1.562484e-46 4.370461e-45
193 2.247472e-03 -1.250261e-03 -3.430572e-02
194 -2.360009e-61 1.316955e-61 3.683686e-60
195 -5.302821e-01 2.037835e-02 -9.316007e-01
196 -2.003326e-53 1.117915e-53 3.126948e-52
197 -3.816915e-18 2.129952e-18 5.957740e-17
198 -2.837239e-50 1.583263e-50 4.428584e-49
$startweights
$startweights[[1]]
$startweights[[1]][[1]]
[,1] [,2] [,3] [,4]
[1,] 0.4437095 -0.009397413 -0.4199274 -0.71938925
[2,] 0.8148820 0.031137453 -0.7091934 -0.04007548
[3,] 0.8515637 -1.429511768 0.3394200 -0.38164388
[4,] -0.1312506 -0.627410443 -2.4553174 1.15547632
$startweights[[1]][[2]]
[,1]
[1,] -0.34435803
[2,] 1.48338742
[3,] 0.08828987
[4,] 0.74028344
[5,] -1.06531448
$result.matrix
[,1]
error 1.615756e+01
reached.threshold 9.354496e-03
steps 2.485400e+04
Intercept.to.1layhid1 6.437095e-01
Age.to.1layhid1 1.014882e+00
Annual_Income.to.1layhid1 1.051564e+00
Spending_Score.to.1layhid1 6.874939e-02
Intercept.to.1layhid2 1.199907e+01
Age.to.1layhid2 6.800324e-02
Annual_Income.to.1layhid2 -4.655805e-02
Spending_Score.to.1layhid2 -2.773476e+00
Intercept.to.1layhid3 6.393853e+00
Age.to.1layhid3 1.012024e-01
Annual_Income.to.1layhid3 -5.647392e-02
Spending_Score.to.1layhid3 -1.579646e+00
Intercept.to.1layhid4 -1.645882e+02
Age.to.1layhid4 2.365767e+00
Annual_Income.to.1layhid4 -9.091469e-02
Spending_Score.to.1layhid4 4.156184e+00
Intercept.to.Gender -4.072916e-01
1layhid1.to.Gender 1.420454e+00
1layhid2.to.Gender 3.206315e+00
1layhid3.to.Gender -3.164347e+00
1layhid4.to.Gender -6.159011e-01
attr(,"class")
[1] "nn"
# Visualization
plot(DNNModel)
# Prediction
DNNPrediction <- predict(DNNModel, test)
# Generate Confusion Matrix
DNNconfusionMatrix <- table(DNNPrediction,
test$Gender,
dnn = c("Prediction", "Actual"))
# Calculate Accuracy
DNNaccuracy <- round(sum(diag(DNNconfusionMatrix))/ sum(DNNconfusionMatrix), digits = 4)
cat("Deep Neural Network Model Accuracy:", DNNaccuracy)
Deep Neural Network Model Accuracy: 0
# MSE
#Metrics::mse(test, NNPrediction)
localH2O = h2o.init()
Connection successful!
R is connected to the H2O cluster:
H2O cluster uptime: 3 hours 4 minutes
H2O cluster timezone: America/New_York
H2O data parsing timezone: UTC
H2O cluster version: 3.32.0.1
H2O cluster version age: 6 months and 24 days !!!
H2O cluster name: H2O_started_from_R_xiochirinos_nah310
H2O cluster total nodes: 1
H2O cluster total memory: 1.38 GB
H2O cluster total cores: 8
H2O cluster allowed cores: 8
H2O cluster healthy: TRUE
H2O Connection ip: localhost
H2O Connection port: 54321
H2O Connection proxy: NA
H2O Internal Security: FALSE
H2O API Extensions: Amazon S3, XGBoost, Algos, AutoML, Core V3, TargetEncoder, Core V4
R Version: R version 4.0.3 (2020-10-10)
Your H2O cluster version is too old (6 months and 24 days)!
Please download and install the latest version from http://h2o.ai/download/
# Convert the data frame to an H2O Data Frame
autoMall_customers <- as.h2o(Mall_customers)
|
| | 0%
|
|====================================================================================================================| 100%
# Sample Data
autoSplit <- h2o.splitFrame(data = autoMall_customers, ratios = c(.75))
AMLtrain <- autoSplit[[1]]
AMLtestValidation <- autoSplit[[2]]
testValidationSplit <- h2o.splitFrame(data = AMLtestValidation, ratios = c(.75))
AMLtest <- testValidationSplit[[1]]
AMLvalidation <- testValidationSplit[[2]]
# AutoML
autoMLModel <- h2o.automl(y = "Gender",
x = c("Age", "Annual_Income", "Spending_Score"),
training_frame = AMLtrain,
validation_frame = AMLvalidation,
balance_classes = TRUE,
max_runtime_secs = 60,
seed = 1234)
|
| | 0%
16:01:53.249: User specified a validation frame with cross-validation still enabled. Please note that the models will still be validated using cross-validation only, the validation frame will be used to provide purely informative validation metrics on the trained models.
|
|==== | 3%
|
|========= | 8%
|
|============= | 11%
|
|=================== | 16%
|
|====================== | 19%
|
|=============================== | 27%
16:02:02.317: Skipping training of model GBM_5_AutoML_20210503_160153 due to exception: water.exceptions.H2OModelBuilderIllegalArgumentException: Illegal argument(s) for GBM model: GBM_5_AutoML_20210503_160153. Details: ERRR on field: _min_rows: The dataset size is too small to split for min_rows=100.0: must have at least 200.0 (weighted) rows, but have only 148.0.
16:02:03.326: DeepLearning_1_AutoML_20210503_160153 [DeepLearning def_1] failed: water.exceptions.H2OModelBuilderIllegalArgumentException: Illegal argument(s) for DeepLearning model: DeepLearning_1_AutoML_20210503_160153_cv_1. Details: ERRR on field: _balance_classes: balance_classes requires classification.
|
|====================================== | 32%
|
|===================================================================== | 59%
|
|======================================================================================== | 76%
|
|========================================================================================= | 77%
|
|========================================================================================== | 77%
|
|=========================================================================================== | 78%
|
|============================================================================================ | 79%
|
|============================================================================================= | 80%
|
|============================================================================================== | 81%
|
|=============================================================================================== | 82%
|
|================================================================================================ | 83%
|
|================================================================================================= | 84%
|
|================================================================================================== | 84%
|
|=================================================================================================== | 85%
|
|==================================================================================================== | 86%
|
|===================================================================================================== | 87%
|
|====================================================================================================== | 88%
|
|======================================================================================================= | 89%
|
|======================================================================================================== | 90%
|
|========================================================================================================= | 90%
|
|========================================================================================================== | 91%
|
|============================================================================================================== | 95%
|
|====================================================================================================================| 100%
# Prediction
AMLprediction = h2o.predict(object = autoMLModel, newdata = AMLtest)
|
| | 0%
|
|====================================================================================================================| 100%
AutoMLtable <- as.data.frame(h2o.get_leaderboard(object = autoMLModel, extra_columns = 'ALL'))
AutoMLtable
# Performance
print(h2o.performance(autoMLModel@leader, AMLtest))
H2ORegressionMetrics: stackedensemble
MSE: 0.2491534
RMSE: 0.4991527
MAE: 0.4936293
RMSLE: 0.3473586
Mean Residual Deviance : 0.2491534
# MSE
#Metrics::mse(AMLtest, AMLprediction)
Mall_customers_Plot1 <- read.csv("Mall_Customers.csv")
Mall_customers_Plot1 <- Mall_customers_Plot1 %>% rename(
Annual_Income = Annual.Income..k..,
Spending_Score = Spending.Score..1.100.,
Gender = Genre)
Mall_customers_Plot1 <- Mall_customers_Plot1 %>%
select(Gender, Age, Annual_Income, Spending_Score)
Mall_customers_Plot1$Gender <- as.factor(Mall_customers_Plot1$Gender)
Mall_customers_Plot1
NA
AnnualIncome_SpendingScore <- ggplot(Mall_customers_Plot, aes(x = Annual_Income, y = Spending_Score, color = Gender)) +
geom_line(alpha = 0.7) +
theme_minimal() +
theme(legend.position="none") +
theme(axis.text.x = element_text(face = "bold", size = 10)) +
theme(axis.text.y = element_text(face = "bold", size = 10)) +
theme(axis.title.x = element_text(size = 15, face = "bold"),
axis.title.y = element_text(size = 15, face = "bold")) +
labs(title = "Annual Income vs. Spending Score",
x = 'Annual Income',
y = 'Spending Score') +
transition_reveal(Annual_Income)
AnnualIncome_SpendingScore
Inserting image 1 at 0.00s (1%)...
Inserting image 2 at 0.10s (2%)...
Inserting image 3 at 0.20s (3%)...
Inserting image 4 at 0.30s (4%)...
Inserting image 5 at 0.40s (5%)...
Inserting image 6 at 0.50s (6%)...
Inserting image 7 at 0.60s (7%)...
Inserting image 8 at 0.70s (8%)...
Inserting image 9 at 0.80s (9%)...
Inserting image 10 at 0.90s (10%)...
Inserting image 11 at 1.00s (11%)...
Inserting image 12 at 1.10s (12%)...
Inserting image 13 at 1.20s (13%)...
Inserting image 14 at 1.30s (14%)...
Inserting image 15 at 1.40s (15%)...
Inserting image 16 at 1.50s (16%)...
Inserting image 17 at 1.60s (17%)...
Inserting image 18 at 1.70s (18%)...
Inserting image 19 at 1.80s (19%)...
Inserting image 20 at 1.90s (20%)...
Inserting image 21 at 2.00s (21%)...
Inserting image 22 at 2.10s (22%)...
Inserting image 23 at 2.20s (23%)...
Inserting image 24 at 2.30s (24%)...
Inserting image 25 at 2.40s (25%)...
Inserting image 26 at 2.50s (26%)...
Inserting image 27 at 2.60s (27%)...
Inserting image 28 at 2.70s (28%)...
Inserting image 29 at 2.80s (29%)...
Inserting image 30 at 2.90s (30%)...
Inserting image 31 at 3.00s (31%)...
Inserting image 32 at 3.10s (32%)...
Inserting image 33 at 3.20s (33%)...
Inserting image 34 at 3.30s (34%)...
Inserting image 35 at 3.40s (35%)...
Inserting image 36 at 3.50s (36%)...
Inserting image 37 at 3.60s (37%)...
Inserting image 38 at 3.70s (38%)...
Inserting image 39 at 3.80s (39%)...
Inserting image 40 at 3.90s (40%)...
Inserting image 41 at 4.00s (41%)...
Inserting image 42 at 4.10s (42%)...
Inserting image 43 at 4.20s (43%)...
Inserting image 44 at 4.30s (44%)...
Inserting image 45 at 4.40s (45%)...
Inserting image 46 at 4.50s (46%)...
Inserting image 47 at 4.60s (47%)...
Inserting image 48 at 4.70s (48%)...
Inserting image 49 at 4.80s (49%)...
Inserting image 50 at 4.90s (50%)...
Inserting image 51 at 5.00s (51%)...
Inserting image 52 at 5.10s (52%)...
Inserting image 53 at 5.20s (53%)...
Inserting image 54 at 5.30s (54%)...
Inserting image 55 at 5.40s (55%)...
Inserting image 56 at 5.50s (56%)...
Inserting image 57 at 5.60s (57%)...
Inserting image 58 at 5.70s (58%)...
Inserting image 59 at 5.80s (59%)...
Inserting image 60 at 5.90s (60%)...
Inserting image 61 at 6.00s (61%)...
Inserting image 62 at 6.10s (62%)...
Inserting image 63 at 6.20s (63%)...
Inserting image 64 at 6.30s (64%)...
Inserting image 65 at 6.40s (65%)...
Inserting image 66 at 6.50s (66%)...
Inserting image 67 at 6.60s (67%)...
Inserting image 68 at 6.70s (68%)...
Inserting image 69 at 6.80s (69%)...
Inserting image 70 at 6.90s (70%)...
Inserting image 71 at 7.00s (71%)...
Inserting image 72 at 7.10s (72%)...
Inserting image 73 at 7.20s (73%)...
Inserting image 74 at 7.30s (74%)...
Inserting image 75 at 7.40s (75%)...
Inserting image 76 at 7.50s (76%)...
Inserting image 77 at 7.60s (77%)...
Inserting image 78 at 7.70s (78%)...
Inserting image 79 at 7.80s (79%)...
Inserting image 80 at 7.90s (80%)...
Inserting image 81 at 8.00s (81%)...
Inserting image 82 at 8.10s (82%)...
Inserting image 83 at 8.20s (83%)...
Inserting image 84 at 8.30s (84%)...
Inserting image 85 at 8.40s (85%)...
Inserting image 86 at 8.50s (86%)...
Inserting image 87 at 8.60s (87%)...
Inserting image 88 at 8.70s (88%)...
Inserting image 89 at 8.80s (89%)...
Inserting image 90 at 8.90s (90%)...
Inserting image 91 at 9.00s (91%)...
Inserting image 92 at 9.10s (92%)...
Inserting image 93 at 9.20s (93%)...
Inserting image 94 at 9.30s (94%)...
Inserting image 95 at 9.40s (95%)...
Inserting image 96 at 9.50s (96%)...
Inserting image 97 at 9.60s (97%)...
Inserting image 98 at 9.70s (98%)...
Inserting image 99 at 9.80s (99%)...
Inserting image 100 at 9.90s (100%)...
Encoding to gif... done!
Age_SpendingScore <- ggplot(Mall_customers_Plot, aes(x = Age, y = Spending_Score, fill = Gender)) +
geom_bar(alpha = 0.7, stat = "identity", position = "dodge") +
theme_minimal() +
theme(legend.position="none") +
theme(axis.text.x = element_text(face = "bold", size = 10, angle = 90)) +
theme(axis.text.y = element_text(face = "bold", size = 10)) +
theme(axis.title.x = element_text(size = 15, face = "bold"),
axis.title.y = element_text(size = 15, face = "bold")) +
labs(x = 'Age',
y = 'Spending Score')
ggplotly(Age_SpendingScore)
Age_AnnualIncome <- ggplot(Mall_customers_Plot1, aes(x = Age, y = Annual_Income, fill = Gender)) +
geom_bar(alpha = 0.7, stat = "identity", position = "dodge") +
theme_minimal() +
theme(legend.position="none") +
theme(axis.text.x = element_text(face = "bold", size = 10, angle = 90)) +
theme(axis.text.y = element_text(face = "bold", size = 10)) +
theme(axis.title.x = element_text(size = 15, face = "bold"),
axis.title.y = element_text(size = 15, face = "bold")) +
labs(x = 'Age',
y = 'Annual Income')
ggplotly(Age_AnnualIncome)
rsconnect::setAccountInfo(name='xiochirinos',
token='0F58DAD4CDCAC9786C37995CD1527BE4',
secret='mwRCIHucKohwJ5R6Q5RLUAQkYNL0bX1LtiHn4boh')
library(rsconnect)
Attaching package: ‘rsconnect’
The following object is masked from ‘package:shiny’:
serverInfo
rsconnect::deployApp('~/Desktop/MS_BIG_DATA/DATA VISUALIZATION/Final Project/Final_Project')
Preparing to deploy application...DONE
Uploading bundle for application: 4077877...DONE
Deploying bundle: 4556395 for application: 4077877 ...
Waiting for task: 924636169
building: Parsing manifest
building: Building image: 5234364
building: Fetching packages
building: Installing packages
building: Installing files
building: Pushing image: 5234364
deploying: Starting instances
rollforward: Activating new instances
terminating: Stopping old instances
Application successfully deployed to https://xiochirinos.shinyapps.io/final_project/